import tensorflow as tf
import os
# Exploratory check of the dataset root; expected entries include
# 'Train', 'Validation', and 'Mix' (see the echoed output below).
os.listdir('Downloads/photos_ml')
['.ipynb_checkpoints', 'Mix', 'Train', 'Validation']
from keras.preprocessing.image import array_to_img, img_to_array, load_img
# Load one sample from the 'S' training class to eyeball the data.
image_1 = load_img('Downloads/photos_ml/Train/S/S10.JPG')
# Bare expression: displays the PIL image inline in a notebook.
image_1
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense,Conv2D,Flatten,Dropout,MaxPooling2D
from tensorflow.keras.preprocessing.image import ImageDataGenerator
import numpy as np
import matplotlib.pyplot as plt
# Dataset layout: <root>/{Train,Validation}/{S,M}.
# FIX: os.path.join() with a single argument is a no-op — use the plain string.
path = 'Downloads/photos_ml'
train_dir = os.path.join(path, 'Train')
validation_dir = os.path.join(path, 'Validation')
# Per-class sub-directories (two classes: S and M).
train_S_dir = os.path.join(train_dir, 'S')
train_M_dir = os.path.join(train_dir, 'M')
validation_S_dir = os.path.join(validation_dir, 'S')
validation_M_dir = os.path.join(validation_dir, 'M')
# Exploratory: confirm os.listdir returns a list (echoes `list` below).
type(os.listdir(train_S_dir))
list
# Count the images available for each class in each split by listing the
# class folders once and taking the lengths.
_s_train_files = os.listdir(train_S_dir)
_m_train_files = os.listdir(train_M_dir)
_s_val_files = os.listdir(validation_S_dir)
_m_val_files = os.listdir(validation_M_dir)
num_S_tr = len(_s_train_files)
num_M_tr = len(_m_train_files)
num_S_val = len(_s_val_files)
num_M_val = len(_m_val_files)
print('S training data ', num_S_tr)
print('M training Data ', num_M_tr)
print('S Valid data ', num_S_val)
print('M valid data ', num_M_val)
S training data 35 M training Data 35 S Valid data 8 M valid data 8
# Aggregate the per-class counts into overall split sizes.
total_train = sum((num_S_tr, num_M_tr))
total_val = sum((num_S_val, num_M_val))
print('Total Training images', total_train)
print('Total validation images', total_val)
Total Training images 70 Total validation images 16
# Training hyper-parameters. NOTE: the (misspelled) constant names
# IMG_HIEGHT / IMG_WIDHT are kept as-is because later cells reference them.
batch_size = 5
epochs = 7
IMG_HIEGHT = 150
IMG_WIDHT = 150

# Pixel values arrive as 0-255 ints; scale them into [0, 1] for the network.
train_image_generator = ImageDataGenerator(rescale=1. / 255)
validation_image_generator = ImageDataGenerator(rescale=1. / 255)

# Stream (image, label) batches straight from the class sub-directories;
# class_mode='binary' yields scalar 0/1 labels.
train_data_gen = train_image_generator.flow_from_directory(
    directory=train_dir,
    batch_size=batch_size,
    shuffle=True,
    target_size=(IMG_HIEGHT, IMG_WIDHT),
    class_mode='binary',
)
Found 70 images belonging to 2 classes.
# Mapping from class folder name to integer label (echoes {'M': 0, 'S': 1}).
train_data_gen.class_indices
{'M': 0, 'S': 1}
# Validation batches: same rescaling pipeline as training, no augmentation.
# NOTE(review): flow_from_directory shuffles by default — consider
# shuffle=False here so predictions line up with filenames; aggregate
# metrics are unaffected either way. Confirm before changing.
val_data_gen = validation_image_generator.flow_from_directory(batch_size=batch_size,
directory=validation_dir,
target_size=(IMG_HIEGHT,IMG_WIDHT),
class_mode='binary')
Found 16 images belonging to 2 classes.
# Pull a batch to inspect shapes. next() is called twice, so it is the
# *second* batch that is kept for inspection.
sample_train_images, label = next(train_data_gen)
sample_train_images, label = next(train_data_gen)
# Expect (batch_size, IMG_HIEGHT, IMG_WIDHT, 3) — echoed below.
sample_train_images.shape
(5, 150, 150, 3)
label.shape
(5,)
label
array([1., 0., 1., 1., 1.], dtype=float32)
sample_train_images
array([[[[0.41176474, 0.30588236, 0.2784314 ],
[0.53333336, 0.42352945, 0.40784317],
[0.44705886, 0.2901961 , 0.28235295],
...,
[0.54509807, 0.56078434, 0.43137258],
[0.6117647 , 0.627451 , 0.49803925],
[0.56078434, 0.5647059 , 0.43921572]],
[[0.39607847, 0.2784314 , 0.27058825],
[0.41176474, 0.2901961 , 0.2784314 ],
[0.42352945, 0.2509804 , 0.24705884],
...,
[0.5647059 , 0.5882353 , 0.44705886],
[0.50980395, 0.5254902 , 0.38823533],
[0.50980395, 0.5254902 , 0.3803922 ]],
[[0.37647063, 0.25490198, 0.24313727],
[0.3529412 , 0.20392159, 0.19215688],
[0.40000004, 0.21176472, 0.20392159],
...,
[0.5764706 , 0.6039216 , 0.43921572],
[0.52156866, 0.54901963, 0.38431376],
[0.44705886, 0.47450984, 0.30980393]],
...,
[[0.227451 , 0.20784315, 0.18431373],
[0.29411766, 0.2392157 , 0.20392159],
[0.23529413, 0.13333334, 0.07450981],
...,
[0.01176471, 0.01176471, 0.01176471],
[0.01176471, 0.01176471, 0.01176471],
[0.01176471, 0.01176471, 0.01176471]],
[[0.19215688, 0.15686275, 0.12941177],
[0.227451 , 0.17254902, 0.13725491],
[0.23529413, 0.13333334, 0.08235294],
...,
[0.01176471, 0.01176471, 0.01176471],
[0.01176471, 0.01176471, 0.01176471],
[0.01176471, 0.01176471, 0.01176471]],
[[0.13725491, 0.07843138, 0.0509804 ],
[0.13333334, 0.0627451 , 0.01568628],
[0.25490198, 0.16470589, 0.10196079],
...,
[0.01176471, 0.01176471, 0.01176471],
[0.00784314, 0.00784314, 0.00784314],
[0.01568628, 0.00784314, 0.01176471]]],
[[[0.86274517, 0.86274517, 0.86274517],
[0.86274517, 0.86274517, 0.86274517],
[0.8862746 , 0.8862746 , 0.8862746 ],
...,
[0.9803922 , 0.94117653, 0.9333334 ],
[0.9450981 , 0.90196085, 0.8862746 ],
[0.8745099 , 0.8313726 , 0.8078432 ]],
[[0.86666673, 0.86666673, 0.86666673],
[0.86666673, 0.86666673, 0.86666673],
[0.8862746 , 0.8862746 , 0.8862746 ],
...,
[0.97647065, 0.93725497, 0.9294118 ],
[0.9686275 , 0.92549026, 0.909804 ],
[0.8941177 , 0.85098046, 0.82745105]],
[[0.87843144, 0.87843144, 0.87843144],
[0.8745099 , 0.8745099 , 0.8745099 ],
[0.8862746 , 0.8862746 , 0.8862746 ],
...,
[0.9686275 , 0.9294118 , 0.9215687 ],
[0.9686275 , 0.92549026, 0.909804 ],
[0.8980393 , 0.854902 , 0.8313726 ]],
...,
[[0.79215693, 0.81568635, 0.8078432 ],
[0.8000001 , 0.8196079 , 0.8313726 ],
[0.81568635, 0.82745105, 0.8470589 ],
...,
[0.9058824 , 0.909804 , 0.89019614],
[0.91372555, 0.9176471 , 0.8980393 ],
[0.8941177 , 0.8980393 , 0.87843144]],
[[0.79215693, 0.81568635, 0.8078432 ],
[0.8000001 , 0.8196079 , 0.8313726 ],
[0.81568635, 0.82745105, 0.8470589 ],
...,
[0.9058824 , 0.909804 , 0.89019614],
[0.91372555, 0.9176471 , 0.8980393 ],
[0.8941177 , 0.8980393 , 0.87843144]],
[[0.78823537, 0.8117648 , 0.80392164],
[0.7960785 , 0.81568635, 0.82745105],
[0.81568635, 0.82745105, 0.8470589 ],
...,
[0.909804 , 0.91372555, 0.8941177 ],
[0.9058824 , 0.909804 , 0.89019614],
[0.90196085, 0.9058824 , 0.8862746 ]]],
[[[0.04705883, 0.01960784, 0.0509804 ],
[0.04705883, 0.01960784, 0.0509804 ],
[0.04705883, 0.01960784, 0.0509804 ],
...,
[0.07843138, 0.07058824, 0.02352941],
[0.16078432, 0.14901961, 0.12156864],
[0.04705883, 0.02745098, 0.01176471]],
[[0.04705883, 0.01568628, 0.06666667],
[0.04705883, 0.01960784, 0.0509804 ],
[0.04705883, 0.01960784, 0.0509804 ],
...,
[0.07843138, 0.06666667, 0.03921569],
[0.07843138, 0.05882353, 0.04313726],
[0.03921569, 0.01568628, 0.01568628]],
[[0.04705883, 0.01960784, 0.0509804 ],
[0.04705883, 0.01960784, 0.0509804 ],
[0.05490196, 0.02745098, 0.05882353],
...,
[0.0627451 , 0.0509804 , 0.02352941],
[0.06666667, 0.04705883, 0.02352941],
[0.0509804 , 0.03137255, 0.01568628]],
...,
[[0.27450982, 0.2509804 , 0.25882354],
[0.4431373 , 0.41960788, 0.41960788],
[0.22352943, 0.20784315, 0.20392159],
...,
[0.17254902, 0.16862746, 0.18823531],
[0.10196079, 0.09803922, 0.11764707],
[0.10588236, 0.10196079, 0.12156864]],
[[0.20392159, 0.18039216, 0.18823531],
[0.24313727, 0.227451 , 0.23137257],
[0.227451 , 0.20392159, 0.20392159],
...,
[0.10980393, 0.10588236, 0.1254902 ],
[0.14901961, 0.14509805, 0.16470589],
[0.10588236, 0.10196079, 0.12156864]],
[[0.21176472, 0.21176472, 0.20392159],
[0.23529413, 0.21960786, 0.21568629],
[0.21960786, 0.20392159, 0.20000002],
...,
[0.1254902 , 0.12156864, 0.14117648],
[0.1254902 , 0.12156864, 0.14117648],
[0.15294118, 0.15294118, 0.15294118]]],
[[[0.7411765 , 0.7294118 , 0.7019608 ],
[0.74509805, 0.73333335, 0.7058824 ],
[0.74509805, 0.73333335, 0.7058824 ],
...,
[0.6 , 0.5686275 , 0.5254902 ],
[0.5176471 , 0.5019608 , 0.45882356],
[0.5568628 , 0.52156866, 0.48627454]],
[[0.74509805, 0.7294118 , 0.6862745 ],
[0.7490196 , 0.73333335, 0.6901961 ],
[0.7490196 , 0.73333335, 0.6901961 ],
...,
[0.5568628 , 0.5411765 , 0.49411768],
[0.53333336, 0.5176471 , 0.47450984],
[0.5529412 , 0.5372549 , 0.49411768]],
[[0.7490196 , 0.7294118 , 0.7058824 ],
[0.7490196 , 0.7294118 , 0.7058824 ],
[0.7490196 , 0.7294118 , 0.7058824 ],
...,
[0.5568628 , 0.5411765 , 0.49803925],
[0.5568628 , 0.5411765 , 0.49803925],
[0.59607846, 0.5803922 , 0.5372549 ]],
...,
[[0.8431373 , 0.8313726 , 0.80392164],
[0.85098046, 0.8313726 , 0.8078432 ],
[0.8431373 , 0.8313726 , 0.80392164],
...,
[0.2784314 , 0.28627452, 0.27450982],
[0.227451 , 0.23529413, 0.22352943],
[0.21960786, 0.21960786, 0.21176472]],
[[0.8588236 , 0.83921576, 0.81568635],
[0.8470589 , 0.82745105, 0.80392164],
[0.8470589 , 0.8352942 , 0.8078432 ],
...,
[0.32941177, 0.3372549 , 0.33333334],
[0.25882354, 0.26666668, 0.2627451 ],
[0.2509804 , 0.25882354, 0.24705884]],
[[0.8470589 , 0.82745105, 0.80392164],
[0.85098046, 0.8313726 , 0.8078432 ],
[0.8470589 , 0.8352942 , 0.8078432 ],
...,
[0.32941177, 0.3372549 , 0.33333334],
[0.32156864, 0.32941177, 0.3254902 ],
[0.28627452, 0.29411766, 0.2901961 ]]],
[[[0.9921569 , 0.9921569 , 0.9921569 ],
[0.9921569 , 0.9921569 , 0.9921569 ],
[0.9921569 , 0.9921569 , 0.9921569 ],
...,
[0.9921569 , 0.9921569 , 0.9921569 ],
[0.9921569 , 0.9921569 , 0.9921569 ],
[0.9921569 , 0.9921569 , 0.9921569 ]],
[[0.9921569 , 0.9921569 , 0.9921569 ],
[0.9921569 , 0.9921569 , 0.9921569 ],
[0.9921569 , 0.9921569 , 0.9921569 ],
...,
[0.9921569 , 0.9921569 , 0.9921569 ],
[0.9921569 , 0.9921569 , 0.9921569 ],
[0.9921569 , 0.9921569 , 0.9921569 ]],
[[0.9921569 , 0.9921569 , 0.9921569 ],
[0.9921569 , 0.9921569 , 0.9921569 ],
[0.9921569 , 0.9921569 , 0.9921569 ],
...,
[0.9921569 , 0.9921569 , 0.9921569 ],
[0.9921569 , 0.9921569 , 0.9921569 ],
[0.9921569 , 0.9921569 , 0.9921569 ]],
...,
[[0.07843138, 0.09019608, 0.1254902 ],
[0.13725491, 0.12156864, 0.10980393],
[0.1764706 , 0.12941177, 0.08235294],
...,
[0.81568635, 0.76470596, 0.7294118 ],
[0.81568635, 0.76470596, 0.7294118 ],
[0.81568635, 0.76470596, 0.7294118 ]],
[[0.0509804 , 0.09019608, 0.09411766],
[0.10980393, 0.11764707, 0.1137255 ],
[0.1764706 , 0.16078432, 0.14901961],
...,
[0.8078432 , 0.7568628 , 0.72156864],
[0.8078432 , 0.7568628 , 0.72156864],
[0.8078432 , 0.7568628 , 0.72156864]],
[[0.04705883, 0.07843138, 0.08627451],
[0.08627451, 0.10980393, 0.10196079],
[0.10980393, 0.12941177, 0.10588236],
...,
[0.8078432 , 0.7568628 , 0.72156864],
[0.8078432 , 0.7568628 , 0.72156864],
[0.8078432 , 0.7568628 , 0.72156864]]]], dtype=float32)
# Display the first image of the sampled batch.
plt.imshow(sample_train_images[0])
plt.show()
# Label of the fourth image in the batch (1 -> 'S' per class_indices above).
label[3]
1.0
# Baseline CNN: three conv/pool stages, then a small dense head ending in
# a single sigmoid unit for binary classification.
model = Sequential()
model.add(Conv2D(16, 3, padding='same', activation='relu',
                 input_shape=(IMG_HIEGHT, IMG_WIDHT, 3)))
model.add(MaxPooling2D())
model.add(Conv2D(32, 3, padding='same', activation='relu'))
model.add(MaxPooling2D())
model.add(Conv2D(16, 3, padding='same', activation='relu'))
model.add(MaxPooling2D())
model.add(Flatten())
model.add(Dense(64, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
model.summary()
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d (Conv2D) (None, 150, 150, 16) 448 _________________________________________________________________ max_pooling2d (MaxPooling2D) (None, 75, 75, 16) 0 _________________________________________________________________ conv2d_1 (Conv2D) (None, 75, 75, 32) 4640 _________________________________________________________________ max_pooling2d_1 (MaxPooling2 (None, 37, 37, 32) 0 _________________________________________________________________ conv2d_2 (Conv2D) (None, 37, 37, 16) 4624 _________________________________________________________________ max_pooling2d_2 (MaxPooling2 (None, 18, 18, 16) 0 _________________________________________________________________ flatten (Flatten) (None, 5184) 0 _________________________________________________________________ dense (Dense) (None, 64) 331840 _________________________________________________________________ dense_1 (Dense) (None, 1) 65 ================================================================= Total params: 341,617 Trainable params: 341,617 Non-trainable params: 0 _________________________________________________________________
# FIX: the final layer already applies a sigmoid, so the loss must be
# computed on probabilities, not logits. The original passed
# from_logits=True, which Keras explicitly warned about at runtime and
# which mis-scales the loss values.
model.compile(optimizer='adam',
              loss=tf.keras.losses.BinaryCrossentropy(from_logits=False),
              metrics=['accuracy'])
# Model.fit_generator is deprecated; Model.fit accepts generators directly.
history = model.fit(train_data_gen,
                    steps_per_epoch=total_train // batch_size,
                    epochs=epochs,
                    validation_data=val_data_gen,
                    validation_steps=total_val // batch_size)
C:\Users\Shivani\.conda\envs\deepl\lib\site-packages\keras\engine\training.py:1972: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.
warnings.warn('`Model.fit_generator` is deprecated and '
Epoch 1/7
C:\Users\Shivani\.conda\envs\deepl\lib\site-packages\keras\backend.py:4994: UserWarning: "`binary_crossentropy` received `from_logits=True`, but the `output` argument was produced by a sigmoid or softmax activation and thus does not represent logits. Was this intended?" '"`binary_crossentropy` received `from_logits=True`, but the `output`'
14/14 [==============================] - 12s 633ms/step - loss: 0.7483 - accuracy: 0.5429 - val_loss: 0.6864 - val_accuracy: 0.4667 Epoch 2/7 14/14 [==============================] - 9s 672ms/step - loss: 0.6855 - accuracy: 0.5714 - val_loss: 0.6950 - val_accuracy: 0.4667 Epoch 3/7 14/14 [==============================] - 8s 565ms/step - loss: 0.6148 - accuracy: 0.6000 - val_loss: 0.6064 - val_accuracy: 0.8000 Epoch 4/7 14/14 [==============================] - 8s 529ms/step - loss: 0.4374 - accuracy: 0.8000 - val_loss: 0.6074 - val_accuracy: 0.7333 Epoch 5/7 14/14 [==============================] - 8s 609ms/step - loss: 0.3924 - accuracy: 0.8714 - val_loss: 0.4680 - val_accuracy: 0.8000 Epoch 6/7 14/14 [==============================] - 7s 526ms/step - loss: 0.1841 - accuracy: 0.9429 - val_loss: 0.9202 - val_accuracy: 0.6000 Epoch 7/7 14/14 [==============================] - 8s 566ms/step - loss: 0.2319 - accuracy: 0.9143 - val_loss: 0.3937 - val_accuracy: 0.8000
# Extract the learning curves recorded by fit().
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs_range = range(epochs)

# Accuracy and loss, training vs validation, side by side.
plt.figure(figsize=(8, 4))
for panel, (train_curve, val_curve, train_name, val_name, panel_title) in enumerate(
        [(acc, val_acc, 'Training Accuracy', 'Val Accuracy', 'Accuracy'),
         (loss, val_loss, 'Training Loss', 'Val LOss', 'Loss')], start=1):
    plt.subplot(1, 2, panel)
    plt.plot(epochs_range, train_curve, label=train_name)
    plt.plot(epochs_range, val_curve, label=val_name)
    plt.legend(loc='lower right')
    plt.title(panel_title)
plt.show()
#data augmentation
# Augment the training images (rotation/shift/flip/zoom) to fight
# overfitting on the tiny 70-image training set.
image_gen_train = ImageDataGenerator(rescale=1./255,
                                     rotation_range=45,
                                     width_shift_range=.15,
                                     height_shift_range=.15,
                                     horizontal_flip=True,
                                     zoom_range=0.5)
# FIX: target_size previously passed IMG_HIEGHT for *both* axes; the
# second axis should be the width constant. Both are 150 today, so output
# is unchanged, but the latent bug would bite if the constants diverged.
train_data_gen = image_gen_train.flow_from_directory(batch_size=batch_size,
                                                     directory=train_dir,
                                                     shuffle=True,
                                                     target_size=(IMG_HIEGHT, IMG_WIDHT),
                                                     class_mode='binary')
Found 70 images belonging to 2 classes.
def plotImages(images_arr):
    """Render up to five images from *images_arr* in a single row."""
    figure, axis_grid = plt.subplots(1, 5, figsize=(20, 20))
    for image, axis in zip(images_arr, axis_grid.flatten()):
        axis.imshow(image)
        axis.axis('off')
    plt.tight_layout()
    plt.show()
# Preview five draws of the first image of batch 0. Presumably each
# indexing of train_data_gen[0] re-runs the random augmentation, so the
# five draws differ — TODO confirm against the Keras generator semantics.
augmented_images = [train_data_gen[0][0][0] for i in range(5)]
plotImages(augmented_images)
# Validation data gets rescaling only — never augmentation.
image_gen_val = ImageDataGenerator(rescale=1./255)
val_data_gen = image_gen_val.flow_from_directory(batch_size=batch_size,
directory=validation_dir,
target_size=(IMG_HIEGHT,IMG_WIDHT),
class_mode='binary')
Found 16 images belonging to 2 classes.
# Second CNN: same backbone shape as the baseline plus Dropout(0.4) after
# every pooling stage and before the output, to regularise the tiny dataset.
model_new = Sequential()
model_new.add(Conv2D(16, 3, padding='same', activation='relu',
                     input_shape=(IMG_HIEGHT, IMG_WIDHT, 3)))
model_new.add(MaxPooling2D())
model_new.add(Dropout(0.4))
model_new.add(Conv2D(16, 3, padding='same', activation='relu'))
model_new.add(MaxPooling2D())
model_new.add(Dropout(0.4))
model_new.add(Conv2D(32, 3, padding='same', activation='relu'))
model_new.add(MaxPooling2D())
model_new.add(Dropout(0.4))
model_new.add(Flatten())
model_new.add(Dense(64, activation='relu'))
model_new.add(Dropout(0.4))
model_new.add(Dense(1, activation='sigmoid'))
model_new.summary()
Model: "sequential_2" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d_6 (Conv2D) (None, 150, 150, 16) 448 _________________________________________________________________ max_pooling2d_6 (MaxPooling2 (None, 75, 75, 16) 0 _________________________________________________________________ dropout_4 (Dropout) (None, 75, 75, 16) 0 _________________________________________________________________ conv2d_7 (Conv2D) (None, 75, 75, 16) 2320 _________________________________________________________________ max_pooling2d_7 (MaxPooling2 (None, 37, 37, 16) 0 _________________________________________________________________ dropout_5 (Dropout) (None, 37, 37, 16) 0 _________________________________________________________________ conv2d_8 (Conv2D) (None, 37, 37, 32) 4640 _________________________________________________________________ max_pooling2d_8 (MaxPooling2 (None, 18, 18, 32) 0 _________________________________________________________________ dropout_6 (Dropout) (None, 18, 18, 32) 0 _________________________________________________________________ flatten_2 (Flatten) (None, 10368) 0 _________________________________________________________________ dense_4 (Dense) (None, 64) 663616 _________________________________________________________________ dropout_7 (Dropout) (None, 64) 0 _________________________________________________________________ dense_5 (Dense) (None, 1) 65 ================================================================= Total params: 671,089 Trainable params: 671,089 Non-trainable params: 0 _________________________________________________________________
# Loss is given as the string form here, which correctly treats the sigmoid
# output as a probability (from_logits defaults to False).
model_new.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
# FIX: Model.fit_generator is deprecated; Model.fit handles generators directly.
history = model_new.fit(train_data_gen,
                        steps_per_epoch=total_train // batch_size,
                        epochs=epochs,
                        validation_data=val_data_gen,
                        validation_steps=total_val // batch_size)
Epoch 1/7 14/14 [==============================] - 14s 1s/step - loss: 0.6836 - accuracy: 0.5571 - val_loss: 0.6882 - val_accuracy: 0.5333 Epoch 2/7 14/14 [==============================] - 12s 838ms/step - loss: 0.6787 - accuracy: 0.5857 - val_loss: 0.6894 - val_accuracy: 0.4667 Epoch 3/7 14/14 [==============================] - 15s 1s/step - loss: 0.6644 - accuracy: 0.6571 - val_loss: 0.6936 - val_accuracy: 0.6000 Epoch 4/7 14/14 [==============================] - 18s 1s/step - loss: 0.6751 - accuracy: 0.5857 - val_loss: 0.6901 - val_accuracy: 0.4667 Epoch 5/7 14/14 [==============================] - 20s 1s/step - loss: 0.6746 - accuracy: 0.7143 - val_loss: 0.6893 - val_accuracy: 0.4667 Epoch 6/7 14/14 [==============================] - 13s 881ms/step - loss: 0.6602 - accuracy: 0.5857 - val_loss: 0.6917 - val_accuracy: 0.4000 Epoch 7/7 14/14 [==============================] - 14s 971ms/step - loss: 0.6515 - accuracy: 0.6857 - val_loss: 0.6917 - val_accuracy: 0.4667
# Learning curves for the dropout model.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs_range = range(epochs)

plt.figure(figsize=(8, 4))
panels = [
    (1, acc, val_acc, 'Training Accuracy', 'Val Accuracy', 'lower right', 'Accuracy'),
    (2, loss, val_loss, 'Training Loss', 'Val LOss', 'upper right', 'Loss'),
]
for slot, train_curve, val_curve, train_name, val_name, legend_loc, panel_title in panels:
    plt.subplot(1, 2, slot)
    plt.plot(epochs_range, train_curve, label=train_name)
    plt.plot(epochs_range, val_curve, label=val_name)
    plt.legend(loc=legend_loc)
    plt.title(panel_title)
plt.show()
# Third CNN: same as model_new but with the default 'valid' padding, so
# each conv layer shrinks the spatial size by 2.
model_new_1 = Sequential()
model_new_1.add(Conv2D(16, 3, activation='relu',
                       input_shape=(IMG_HIEGHT, IMG_WIDHT, 3)))
model_new_1.add(MaxPooling2D())
model_new_1.add(Dropout(0.4))
model_new_1.add(Conv2D(16, 3, activation='relu'))
model_new_1.add(MaxPooling2D())
model_new_1.add(Dropout(0.4))
model_new_1.add(Conv2D(32, 3, activation='relu'))
model_new_1.add(MaxPooling2D())
model_new_1.add(Dropout(0.4))
model_new_1.add(Flatten())
model_new_1.add(Dense(64, activation='relu'))
model_new_1.add(Dropout(0.4))
model_new_1.add(Dense(1, activation='sigmoid'))
model_new_1.summary()
Model: "sequential_3" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d_9 (Conv2D) (None, 148, 148, 16) 448 _________________________________________________________________ max_pooling2d_9 (MaxPooling2 (None, 74, 74, 16) 0 _________________________________________________________________ dropout_8 (Dropout) (None, 74, 74, 16) 0 _________________________________________________________________ conv2d_10 (Conv2D) (None, 72, 72, 16) 2320 _________________________________________________________________ max_pooling2d_10 (MaxPooling (None, 36, 36, 16) 0 _________________________________________________________________ dropout_9 (Dropout) (None, 36, 36, 16) 0 _________________________________________________________________ conv2d_11 (Conv2D) (None, 34, 34, 32) 4640 _________________________________________________________________ max_pooling2d_11 (MaxPooling (None, 17, 17, 32) 0 _________________________________________________________________ dropout_10 (Dropout) (None, 17, 17, 32) 0 _________________________________________________________________ flatten_3 (Flatten) (None, 9248) 0 _________________________________________________________________ dense_6 (Dense) (None, 64) 591936 _________________________________________________________________ dropout_11 (Dropout) (None, 64) 0 _________________________________________________________________ dense_7 (Dense) (None, 1) 65 ================================================================= Total params: 599,409 Trainable params: 599,409 Non-trainable params: 0 _________________________________________________________________
model_new_1.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
# FIX: Model.fit_generator is deprecated; Model.fit handles generators directly.
history = model_new_1.fit(train_data_gen,
                          steps_per_epoch=total_train // batch_size,
                          epochs=epochs,
                          validation_data=val_data_gen,
                          validation_steps=total_val // batch_size)
Epoch 1/7 14/14 [==============================] - 23s 2s/step - loss: 0.6899 - accuracy: 0.5286 - val_loss: 0.6934 - val_accuracy: 0.4000 Epoch 2/7 14/14 [==============================] - 18s 1s/step - loss: 0.7060 - accuracy: 0.5000 - val_loss: 0.6922 - val_accuracy: 0.5333 Epoch 3/7 14/14 [==============================] - 15s 1s/step - loss: 0.6882 - accuracy: 0.5429 - val_loss: 0.6927 - val_accuracy: 0.4667 Epoch 4/7 14/14 [==============================] - 13s 934ms/step - loss: 0.6911 - accuracy: 0.5857 - val_loss: 0.6900 - val_accuracy: 0.5333 Epoch 5/7 14/14 [==============================] - 20s 1s/step - loss: 0.6810 - accuracy: 0.5857 - val_loss: 0.6923 - val_accuracy: 0.6000 Epoch 6/7 14/14 [==============================] - 18s 1s/step - loss: 0.6933 - accuracy: 0.5000 - val_loss: 0.6928 - val_accuracy: 0.6000 Epoch 7/7 14/14 [==============================] - 13s 859ms/step - loss: 0.6726 - accuracy: 0.5857 - val_loss: 0.6987 - val_accuracy: 0.5333
# Learning curves for the valid-padding model.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs_range = range(epochs)

plt.figure(figsize=(8, 4))
curve_specs = [
    (1, acc, val_acc, 'Training Accuracy', 'Val Accuracy', 'lower right', 'Accuracy'),
    (2, loss, val_loss, 'Training Loss', 'Val LOss', 'upper right', 'Loss'),
]
for slot, train_curve, val_curve, train_name, val_name, legend_loc, panel_title in curve_specs:
    plt.subplot(1, 2, slot)
    plt.plot(epochs_range, train_curve, label=train_name)
    plt.plot(epochs_range, val_curve, label=val_name)
    plt.legend(loc=legend_loc)
    plt.title(panel_title)
plt.show()
# Re-imports (already imported above; harmless in a notebook session).
from keras.preprocessing.image import array_to_img, img_to_array, load_img
import tensorflow as tf
# Recap of the third model's architecture before running inference.
model_new_1.summary()
Model: "sequential_3" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d_9 (Conv2D) (None, 148, 148, 16) 448 _________________________________________________________________ max_pooling2d_9 (MaxPooling2 (None, 74, 74, 16) 0 _________________________________________________________________ dropout_8 (Dropout) (None, 74, 74, 16) 0 _________________________________________________________________ conv2d_10 (Conv2D) (None, 72, 72, 16) 2320 _________________________________________________________________ max_pooling2d_10 (MaxPooling (None, 36, 36, 16) 0 _________________________________________________________________ dropout_9 (Dropout) (None, 36, 36, 16) 0 _________________________________________________________________ conv2d_11 (Conv2D) (None, 34, 34, 32) 4640 _________________________________________________________________ max_pooling2d_11 (MaxPooling (None, 17, 17, 32) 0 _________________________________________________________________ dropout_10 (Dropout) (None, 17, 17, 32) 0 _________________________________________________________________ flatten_3 (Flatten) (None, 9248) 0 _________________________________________________________________ dense_6 (Dense) (None, 64) 591936 _________________________________________________________________ dropout_11 (Dropout) (None, 64) 0 _________________________________________________________________ dense_7 (Dense) (None, 1) 65 ================================================================= Total params: 599,409 Trainable params: 599,409 Non-trainable params: 0 _________________________________________________________________
from keras.preprocessing.image import array_to_img, img_to_array, load_img
# Load the test image at the network's input resolution.
test1 = load_img('test1.jpg', target_size=(150, 150, 3))
# FIX: the models were trained on inputs rescaled to [0, 1]
# (ImageDataGenerator(rescale=1./255)), but img_to_array yields raw 0-255
# values. Apply the same rescaling here, or the prediction below runs on
# inputs 255x larger than anything the model saw during training.
data_2 = img_to_array(test1) / 255.0
# Echo the array for inspection.
data_2
array([[[180., 181., 173.],
[183., 181., 169.],
[184., 186., 173.],
...,
[230., 226., 227.],
[226., 226., 226.],
[227., 227., 227.]],
[[178., 179., 171.],
[180., 180., 172.],
[179., 180., 172.],
...,
[227., 227., 227.],
[226., 226., 226.],
[227., 227., 227.]],
[[184., 186., 173.],
[186., 183., 176.],
[180., 181., 173.],
...,
[226., 226., 226.],
[227., 227., 227.],
[227., 227., 227.]],
...,
[[181., 182., 176.],
[191., 192., 186.],
[189., 190., 184.],
...,
[ 88., 79., 74.],
[ 90., 80., 78.],
[ 90., 81., 76.]],
[[189., 190., 184.],
[187., 188., 182.],
[186., 187., 181.],
...,
[ 89., 80., 75.],
[ 90., 81., 76.],
[ 89., 80., 75.]],
[[186., 189., 182.],
[183., 185., 180.],
[187., 190., 183.],
...,
[ 95., 86., 81.],
[ 87., 78., 73.],
[ 90., 81., 76.]]], dtype=float32)
# Add a batch dimension: (150, 150, 3) -> (1, 150, 150, 3).
img_test = data_2.reshape(1,150,150,3)
# Echoes (1, 150, 150, 3).
img_test.shape
(1, 150, 150, 3)
# NOTE(review): this predicts with the *first* baseline `model`, not the
# dropout/augmentation models (model_new, model_new_1) trained afterwards —
# confirm that is intended.
pred = model.predict(img_test)
# Round the sigmoid output to a hard 0/1 class (1 -> 'S' per class_indices).
pred.round()
array([[1.]], dtype=float32)